Fixing yum Not Running in EC2 User Data When Building with Terraform
Hello! This is Tsukuboshi (tsukuboshi0755), steadily working through my move into a place of my own!
While building an EC2 instance with Terraform, I ran into a case where the yum command I had written in the instance's user data wasn't actually running.
It's an easy thing to overlook, so I'd like to share it!
Environment
$ terraform -v
Terraform v1.1.7
on darwin_arm64
Verification architecture
An internet route is provided from the Internet Gateway, through a NAT Gateway in the public subnet, to the EC2 instance in the private subnet.
I log in to the EC2 instance using the SSM Session Manager feature.
The instance is configured so that, once it is created, its user data runs the yum update command.
What happened?
① Create the tf files as follows
$ tree
.
├── ec2.tf
├── iam.tf
├── main.tf
├── securitygroup.tf
├── src
│   └── user_data.tpl
├── variable.tf
└── vpc.tf
ec2.tf
# ==================== #
# AMI                  #
# ==================== #
# AMI information for the latest Amazon Linux 2
data "aws_ami" "tf_ami" {
  most_recent = true
  owners      = ["amazon"]

  filter {
    name   = "architecture"
    values = ["x86_64"]
  }
  filter {
    name   = "root-device-type"
    values = ["ebs"]
  }
  filter {
    name   = "name"
    values = ["amzn2-ami-hvm-*"]
  }
  filter {
    name   = "virtualization-type"
    values = ["hvm"]
  }
  filter {
    name   = "block-device-mapping.volume-type"
    values = ["gp2"]
  }
  filter {
    name   = "state"
    values = ["available"]
  }
}

# ==================== #
# EC2 Instance         #
# ==================== #
resource "aws_instance" "tf_instance" {
  ami                         = data.aws_ami.tf_ami.image_id
  instance_type               = var.instance_type
  subnet_id                   = aws_subnet.tf_subnet_2.id
  associate_public_ip_address = false
  vpc_security_group_ids      = [aws_security_group.tf_sg.id]
  iam_instance_profile        = aws_iam_instance_profile.tf_instance_profile.name
  # Pass the user data script (src/user_data.tpl) so it runs on first boot
  user_data                   = file(var.user_data_file)
  tags = {
    Name    = "${var.project}-${var.environment}-ec2"
    Project = var.project
    Env     = var.environment
  }
}
iam.tf
# ==================== #
# IAM                  #
# ==================== #
data "aws_iam_policy_document" "tf_policy_document" {
  statement {
    actions = ["sts:AssumeRole"]
    principals {
      type        = "Service"
      identifiers = ["ec2.amazonaws.com"]
    }
  }
}

resource "aws_iam_role" "tf_iam_role" {
  name               = "tf_iam_role"
  assume_role_policy = data.aws_iam_policy_document.tf_policy_document.json
}

resource "aws_iam_role_policy_attachment" "tf_iam_role_policy_attachment" {
  role       = aws_iam_role.tf_iam_role.name
  policy_arn = "arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore"
}

resource "aws_iam_instance_profile" "tf_instance_profile" {
  name = "tf_instance_profile"
  role = aws_iam_role.tf_iam_role.name
}
src/user_data.tpl
#!/bin/bash
yum update -y
main.tf
# ==================== #
# Terraform            #
# ==================== #
terraform {
  required_version = ">=1.0.0"
  required_providers {
    aws = {
      source  = "hashicorp/aws"
      version = "~> 4.0"
    }
  }
}

# ==================== #
# Provider             #
# ==================== #
provider "aws" {
  region = var.aws_region
}
securitygroup.tf
# ==================== #
# Security Group       #
# ==================== #
resource "aws_security_group" "tf_sg" {
  name   = "tf_sg"
  vpc_id = aws_vpc.tf_vpc.id
  tags = {
    Name    = "${var.project}-${var.environment}-sg"
    Project = var.project
    Env     = var.environment
  }
}

# Outbound rule (allow all)
resource "aws_security_group_rule" "out_all" {
  security_group_id = aws_security_group.tf_sg.id
  type              = "egress"
  cidr_blocks       = ["0.0.0.0/0"]
  from_port         = 0
  to_port           = 0
  protocol          = "-1"
}
vpc.tf
# ==================== #
# VPC                  #
# ==================== #
resource "aws_vpc" "tf_vpc" {
  cidr_block           = var.vpc_cidr_block
  enable_dns_support   = var.vpc_enable_dns_support
  enable_dns_hostnames = var.vpc_enable_dns_hostnames
  tags = {
    Name    = "${var.project}-${var.environment}-vpc"
    Project = var.project
    Env     = var.environment
  }
}

# ==================== #
# Subnet               #
# ==================== #
# Public subnet
resource "aws_subnet" "tf_subnet_1" {
  vpc_id                  = aws_vpc.tf_vpc.id
  cidr_block              = var.subnet_cidr_block_1
  map_public_ip_on_launch = true
  availability_zone       = var.availability_zone
  tags = {
    Name    = "${var.project}-${var.environment}-subnet-1"
    Project = var.project
    Env     = var.environment
  }
}

# Private subnet
resource "aws_subnet" "tf_subnet_2" {
  vpc_id                  = aws_vpc.tf_vpc.id
  cidr_block              = var.subnet_cidr_block_2
  map_public_ip_on_launch = false
  availability_zone       = var.availability_zone
  tags = {
    Name    = "${var.project}-${var.environment}-subnet-2"
    Project = var.project
    Env     = var.environment
  }
}

# ==================== #
# Internet Gateway     #
# ==================== #
resource "aws_internet_gateway" "tf_igw" {
  vpc_id = aws_vpc.tf_vpc.id
  tags = {
    Name    = "${var.project}-${var.environment}-igw"
    Project = var.project
    Env     = var.environment
  }
}

# ==================== #
# Elastic IP           #
# ==================== #
resource "aws_eip" "tf_eip" {
  vpc        = true
  depends_on = [aws_internet_gateway.tf_igw]
  tags = {
    Name    = "${var.project}-${var.environment}-eip"
    Project = var.project
    Env     = var.environment
  }
}

# ==================== #
# Nat Gateway          #
# ==================== #
resource "aws_nat_gateway" "tf_ngw" {
  allocation_id = aws_eip.tf_eip.id
  subnet_id     = aws_subnet.tf_subnet_1.id
  tags = {
    Name = "${var.project}-${var.environment}-ngw"
  }
  depends_on = [aws_eip.tf_eip]
}

# ==================== #
# Route Table          #
# ==================== #
# Public route table
resource "aws_route_table" "tf_public_rt" {
  vpc_id = aws_vpc.tf_vpc.id
  tags = {
    Name    = "${var.project}-${var.environment}-rt"
    Project = var.project
    Env     = var.environment
  }
}

resource "aws_route" "tf_public_route" {
  route_table_id         = aws_route_table.tf_public_rt.id
  gateway_id             = aws_internet_gateway.tf_igw.id
  destination_cidr_block = "0.0.0.0/0"
}

resource "aws_route_table_association" "tf_public_subrt" {
  subnet_id      = aws_subnet.tf_subnet_1.id
  route_table_id = aws_route_table.tf_public_rt.id
}

# Private route table
resource "aws_route_table" "tf_private_rt" {
  vpc_id = aws_vpc.tf_vpc.id
  tags = {
    Name    = "${var.project}-${var.environment}-private-rt"
    Project = var.project
    Env     = var.environment
  }
}

resource "aws_route" "tf_private_route" {
  route_table_id         = aws_route_table.tf_private_rt.id
  nat_gateway_id         = aws_nat_gateway.tf_ngw.id
  destination_cidr_block = "0.0.0.0/0"
}

resource "aws_route_table_association" "tf_private_subrt_2" {
  subnet_id      = aws_subnet.tf_subnet_2.id
  route_table_id = aws_route_table.tf_private_rt.id
}
variables.tf
# ==================== #
# Variables            #
# ==================== #
variable "aws_region" {
  default = "ap-northeast-1"
}

variable "project" {
  default = "minimum"
}

variable "environment" {
  default = "test"
}

# EC2 #
variable "instance_type" {
  default = "t2.micro"
}

variable "user_data_file" {
  default = "./src/user_data.tpl"
}

# VPC #
variable "vpc_cidr_block" {
  default = "10.0.0.0/16"
}

variable "vpc_enable_dns_support" {
  default = "true"
}

variable "vpc_enable_dns_hostnames" {
  default = "false"
}

variable "subnet_cidr_block_1" {
  default = "10.0.0.0/24"
}

variable "subnet_cidr_block_2" {
  default = "10.0.1.0/24"
}

variable "availability_zone" {
  default = "ap-northeast-1a"
}
③ Run the terraform apply command to build the resources
④ Log in to the newly built EC2 instance using SSM Session Manager
⑤ Looking at /var/log/cloud-init-output.log (the log of the user data execution), the yum update command in the user data seems to have failed...?
sh-4.2$ sudo cat /var/log/cloud-init-output.log
Cloud-init v. 19.3-45.amzn2 running 'init-local' at Thu, 28 Apr 2022 01:23:57 +0000. Up 6.57 seconds.
Cloud-init v. 19.3-45.amzn2 running 'init' at Thu, 28 Apr 2022 01:23:58 +0000. Up 7.71 seconds.
ci-info: ++++++++++++++++++++++++++++++++++++++Net device info++++++++++++++++++++++++++++++++++++++
ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
ci-info: | Device |  Up  |          Address           |      Mask     | Scope  |     Hw-Address    |
ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
ci-info: |  eth0  | True |         10.0.1.118         | 255.255.255.0 | global | 06:1b:09:68:41:7b |
ci-info: |  eth0  | True | fe80::41b:9ff:fe68:417b/64 |       .       |  link  | 06:1b:09:68:41:7b |
ci-info: |   lo   | True |         127.0.0.1          |   255.0.0.0   |  host  |         .         |
ci-info: |   lo   | True |          ::1/128           |       .       |  host  |         .         |
ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
ci-info: ++++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++++
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: | Route |   Destination   | Gateway  |     Genmask     | Interface | Flags |
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: |   0   |     0.0.0.0     | 10.0.1.1 |     0.0.0.0     |    eth0   |   UG  |
ci-info: |   1   |     10.0.1.0    | 0.0.0.0  |  255.255.255.0  |    eth0   |   U   |
ci-info: |   2   | 169.254.169.254 | 0.0.0.0  | 255.255.255.255 |    eth0   |   UH  |
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
ci-info: +-------+-------------+---------+-----------+-------+
ci-info: | Route | Destination | Gateway | Interface | Flags |
ci-info: +-------+-------------+---------+-----------+-------+
ci-info: |   9   |  fe80::/64  |    ::   |    eth0   |   U   |
ci-info: |   11  |    local    |    ::   |    eth0   |   U   |
ci-info: |   12  |   ff00::/8  |    ::   |    eth0   |   U   |
ci-info: +-------+-------------+---------+-----------+-------+
Cloud-init v. 19.3-45.amzn2 running 'modules:config' at Thu, 28 Apr 2022 01:24:00 +0000. Up 9.81 seconds.
Loaded plugins: extras_suggestions, langpacks, priorities, update-motd


 One of the configured repositories failed (Unknown),
 and yum doesn't have enough cached data to continue. At this point the only
 safe thing yum can do is fail. There are a few ways to work "fix" this:

     1. Contact the upstream for the repository and get them to fix the problem.

     2. Reconfigure the baseurl/etc. for the repository, to point to a working
        upstream. This is most often useful if you are using a newer
        distribution release than is supported by the repository (and the
        packages for the previous distribution release still work).

     3. Run the command with the repository temporarily disabled
            yum --disablerepo=<repoid> ...

     4. Disable the repository permanently, so yum won't use it by default. Yum
        will then just ignore the repository until you permanently enable it
        again or use --enablerepo for temporary usage:

            yum-config-manager --disable <repoid>
        or
            subscription-manager repos --disable=<repoid>

     5. Configure the failing repository to be skipped, if it is unavailable.
        Note that yum will try to contact the repo. when it runs most commands,
        so will have to try and fail each time (and thus. yum will be be much
        slower). If it is a very temporary problem though, this is often a nice
        compromise:

            yum-config-manager --save --setopt=<repoid>.skip_if_unavailable=true

Cannot find a valid baseurl for repo: amzn2-core/2/x86_64
Could not retrieve mirrorlist https://amazonlinux-2-repos-ap-northeast-1.s3.dualstack.ap-northeast-1.amazonaws.com/2/core/latest/x86_64/mirror.list error was
12: Timeout on https://amazonlinux-2-repos-ap-northeast-1.s3.dualstack.ap-northeast-1.amazonaws.com/2/core/latest/x86_64/mirror.list: (28, 'Failed to connect to amazonlinux-2-repos-ap-northeast-1.s3.dualstack.ap-northeast-1.amazonaws.com port 443 after 2702 ms: Connection timed out')
Apr 28 01:24:21 cloud-init[3008]: util.py[WARNING]: Package upgrade failed
Apr 28 01:24:21 cloud-init[3008]: cc_package_update_upgrade_install.py[WARNING]: 1 failed with exceptions, re-raising the last one
Apr 28 01:24:21 cloud-init[3008]: util.py[WARNING]: Running module package-update-upgrade-install (<module 'cloudinit.config.cc_package_update_upgrade_install' from '/usr/lib/python2.7/site-packages/cloudinit/config/cc_package_update_upgrade_install.pyc'>) failed
Cloud-init v. 19.3-45.amzn2 running 'modules:final' at Thu, 28 Apr 2022 01:24:21 +0000. Up 30.74 seconds.
ci-info: no authorized ssh keys fingerprints found for user ec2-user.
Cloud-init v. 19.3-45.amzn2 finished at Thu, 28 Apr 2022 01:24:21 +0000. Datasource DataSourceEc2. Up 30.90 seconds
Cause of the error
In short, the cause was that the user data initialization ran on the EC2 instance at a point when the route to the internet had not yet been established.
What tipped me off was watching the output of the terraform apply command and noticing that the EC2 instance had finished being created before the NAT Gateway.
A NAT Gateway takes around 2-3 minutes to create, so if you don't specify anything, the EC2 instance ends up being created first...
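This happens because Terraform only orders resources based on the references between them, and nothing in the instance definition refers to the NAT Gateway. For reference, these are the only cross-resource references in aws_instance.tf_instance (excerpted from ec2.tf above):

resource "aws_instance" "tf_instance" {
  # (excerpt; see ec2.tf above for the full definition)
  ami                    = data.aws_ami.tf_ami.image_id
  subnet_id              = aws_subnet.tf_subnet_2.id
  vpc_security_group_ids = [aws_security_group.tf_sg.id]
  iam_instance_profile   = aws_iam_instance_profile.tf_instance_profile.name
}

None of these resources depend on aws_nat_gateway.tf_ngw, so Terraform considers the instance ready to create as soon as the AMI lookup, subnet, security group, and instance profile are done.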
Solution
I used depends_on to explicitly create the EC2 instance after the NAT Gateway.
# ==================== #
# EC2 Instance         #
# ==================== #
resource "aws_instance" "tf_instance" {
  # (snip)
  depends_on = [
    aws_nat_gateway.tf_ngw
  ]
}
As a result, the user data now runs successfully!
sh-4.2$ sudo cat /var/log/cloud-init-output.log
Cloud-init v. 19.3-45.amzn2 running 'init-local' at Thu, 28 Apr 2022 01:39:51 +0000. Up 8.09 seconds.
Cloud-init v. 19.3-45.amzn2 running 'init' at Thu, 28 Apr 2022 01:39:52 +0000. Up 9.31 seconds.
ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++
ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
ci-info: | Device |  Up  |           Address           |      Mask     | Scope  |     Hw-Address    |
ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
ci-info: |  eth0  | True |          10.0.1.241         | 255.255.255.0 | global | 06:3b:6c:95:aa:6d |
ci-info: |  eth0  | True | fe80::43b:6cff:fe95:aa6d/64 |       .       |  link  | 06:3b:6c:95:aa:6d |
ci-info: |   lo   | True |          127.0.0.1          |   255.0.0.0   |  host  |         .         |
ci-info: |   lo   | True |           ::1/128           |       .       |  host  |         .         |
ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+
ci-info: ++++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++++
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: | Route |   Destination   | Gateway  |     Genmask     | Interface | Flags |
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: |   0   |     0.0.0.0     | 10.0.1.1 |     0.0.0.0     |    eth0   |   UG  |
ci-info: |   1   |     10.0.1.0    | 0.0.0.0  |  255.255.255.0  |    eth0   |   U   |
ci-info: |   2   | 169.254.169.254 | 0.0.0.0  | 255.255.255.255 |    eth0   |   UH  |
ci-info: +-------+-----------------+----------+-----------------+-----------+-------+
ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
ci-info: +-------+-------------+---------+-----------+-------+
ci-info: | Route | Destination | Gateway | Interface | Flags |
ci-info: +-------+-------------+---------+-----------+-------+
ci-info: |   9   |  fe80::/64  |    ::   |    eth0   |   U   |
ci-info: |   11  |    local    |    ::   |    eth0   |   U   |
ci-info: |   12  |   ff00::/8  |    ::   |    eth0   |   U   |
ci-info: +-------+-------------+---------+-----------+-------+
Cloud-init v. 19.3-45.amzn2 running 'modules:config' at Thu, 28 Apr 2022 01:39:54 +0000. Up 11.64 seconds.
Loaded plugins: extras_suggestions, langpacks, priorities, update-motd
Existing lock /var/run/yum.pid: another copy is running as pid 3156.
Another app is currently holding the yum lock; waiting for it to exit...
  The other application is: yum
    Memory : 32 M RSS (256 MB VSZ)
    Started: Thu Apr 28 01:39:53 2022 - 00:03 ago
    State  : Running, pid: 3156
Another app is currently holding the yum lock; waiting for it to exit...
  The other application is: yum
    Memory : 86 M RSS (385 MB VSZ)
    Started: Thu Apr 28 01:39:53 2022 - 00:05 ago
    State  : Running, pid: 3156
Another app is currently holding the yum lock; waiting for it to exit...
  The other application is: yum
    Memory : 87 M RSS (385 MB VSZ)
    Started: Thu Apr 28 01:39:53 2022 - 00:07 ago
    State  : Running, pid: 3156
Another app is currently holding the yum lock; waiting for it to exit...
  The other application is: yum
    Memory : 162 M RSS (461 MB VSZ)
    Started: Thu Apr 28 01:39:53 2022 - 00:09 ago
    State  : Running, pid: 3156
--> 1:grub2-pc-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> rpm-libs-4.11.3-40.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> libtiff-4.0.3-35.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> selinux-policy-targeted-3.13.1-192.amzn2.6.8.noarch from amzn2-core removed (updateinfo)
--> 1:grub2-tools-extra-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> systemd-sysv-219-78.amzn2.0.15.x86_64 from installed removed (updateinfo)
--> 1:grub2-tools-efi-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> rpm-build-libs-4.11.3-48.amzn2.0.2.x86_64 from amzn2-core removed (updateinfo)
--> 1:grub2-efi-x64-ec2-2.06-2.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> systemd-219-78.amzn2.0.15.x86_64 from installed removed (updateinfo)
--> 1:grub2-common-2.06-2.amzn2.0.6.noarch from installed removed (updateinfo)
--> systemd-libs-219-78.amzn2.0.15.x86_64 from installed removed (updateinfo)
--> libgcc-7.3.1-13.amzn2.x86_64 from installed removed (updateinfo)
--> 1:grub2-tools-minimal-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> selinux-policy-3.13.1-192.amzn2.6.7.noarch from installed removed (updateinfo)
--> 1:grub2-pc-modules-2.06-9.amzn2.0.1.noarch from amzn2-core removed (updateinfo)
--> libgomp-7.3.1-13.amzn2.x86_64 from installed removed (updateinfo)
--> rpm-build-libs-4.11.3-40.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> 1:grub2-2.06-2.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> tzdata-2022a-1.amzn2.noarch from amzn2-core removed (updateinfo)
--> 12:dhcp-common-4.2.5-77.amzn2.1.5.x86_64 from installed removed (updateinfo)
--> libstdc++-7.3.1-14.amzn2.x86_64 from amzn2-core removed (updateinfo)
--> rpm-4.11.3-48.amzn2.0.2.x86_64 from amzn2-core removed (updateinfo)
--> expat-2.1.0-12.amzn2.0.3.x86_64 from installed removed (updateinfo)
--> 1:grub2-tools-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> 12:dhcp-libs-4.2.5-77.amzn2.1.5.x86_64 from installed removed (updateinfo)
--> 1:grub2-tools-minimal-2.06-2.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> rpm-libs-4.11.3-48.amzn2.0.2.x86_64 from amzn2-core removed (updateinfo)
--> systemd-219-78.amzn2.0.16.x86_64 from amzn2-core removed (updateinfo)
--> 1:grub2-tools-2.06-2.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> python2-rpm-4.11.3-48.amzn2.0.2.x86_64 from amzn2-core removed (updateinfo)
--> rpm-plugin-systemd-inhibit-4.11.3-48.amzn2.0.2.x86_64 from amzn2-core removed (updateinfo)
--> systemd-sysv-219-78.amzn2.0.16.x86_64 from amzn2-core removed (updateinfo)
--> systemd-libs-219-78.amzn2.0.16.x86_64 from amzn2-core removed (updateinfo)
--> 12:dhcp-common-4.2.5-77.amzn2.1.6.x86_64 from amzn2-core removed (updateinfo)
--> tzdata-2021e-1.amzn2.noarch from installed removed (updateinfo)
--> 1:grub2-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> libgomp-7.3.1-14.amzn2.x86_64 from amzn2-core removed (updateinfo)
--> 12:dhcp-libs-4.2.5-77.amzn2.1.6.x86_64 from amzn2-core removed (updateinfo)
--> libtiff-4.0.3-35.amzn2.x86_64 from installed removed (updateinfo)
--> rpm-plugin-systemd-inhibit-4.11.3-40.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> selinux-policy-targeted-3.13.1-192.amzn2.6.7.noarch from installed removed (updateinfo)
--> libgcc-7.3.1-14.amzn2.x86_64 from amzn2-core removed (updateinfo)
--> python2-rpm-4.11.3-40.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> selinux-policy-3.13.1-192.amzn2.6.8.noarch from amzn2-core removed (updateinfo)
--> expat-2.1.0-12.amzn2.0.4.x86_64 from amzn2-core removed (updateinfo)
--> 12:dhclient-4.2.5-77.amzn2.1.5.x86_64 from installed removed (updateinfo)
--> 12:dhclient-4.2.5-77.amzn2.1.6.x86_64 from amzn2-core removed (updateinfo)
--> 1:grub2-pc-modules-2.06-2.amzn2.0.6.noarch from installed removed (updateinfo)
--> 1:grub2-pc-2.06-2.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> libstdc++-7.3.1-13.amzn2.x86_64 from installed removed (updateinfo)
--> rpm-4.11.3-40.amzn2.0.6.x86_64 from installed removed (updateinfo)
--> 1:grub2-efi-x64-ec2-2.06-9.amzn2.0.1.x86_64 from amzn2-core removed (updateinfo)
--> 1:grub2-common-2.06-9.amzn2.0.1.noarch from amzn2-core removed (updateinfo)
4 package(s) needed (+0 related) for security, out of 32 available
Resolving Dependencies
--> Running transaction check
---> Package gzip.x86_64 0:1.5-10.amzn2 will be updated
---> Package gzip.x86_64 0:1.5-10.amzn2.0.1 will be an update
---> Package python-pillow.x86_64 0:2.0.0-21.gitd1c6db8.amzn2.0.1 will be updated
---> Package python-pillow.x86_64 0:2.0.0-23.gitd1c6db8.amzn2.0.1 will be an update
---> Package xz.x86_64 0:5.2.2-1.amzn2.0.2 will be updated
---> Package xz.x86_64 0:5.2.2-1.amzn2.0.3 will be an update
---> Package xz-libs.x86_64 0:5.2.2-1.amzn2.0.2 will be updated
---> Package xz-libs.x86_64 0:5.2.2-1.amzn2.0.3 will be an update
--> Finished Dependency Resolution

Dependencies Resolved

================================================================================
 Package          Arch      Version                        Repository     Size
================================================================================
Updating:
 gzip             x86_64    1.5-10.amzn2.0.1               amzn2-core    129 k
 python-pillow    x86_64    2.0.0-23.gitd1c6db8.amzn2.0.1  amzn2-core    440 k
 xz               x86_64    5.2.2-1.amzn2.0.3              amzn2-core    228 k
 xz-libs          x86_64    5.2.2-1.amzn2.0.3              amzn2-core    104 k

Transaction Summary
================================================================================
Upgrade  4 Packages

Total download size: 901 k
Downloading packages:
Delta RPMs disabled because /usr/bin/applydeltarpm not installed.
--------------------------------------------------------------------------------
Total                                              5.0 MB/s | 901 kB  00:00
Running transaction check
Running transaction test
Transaction test succeeded
Running transaction
  Updating   : xz-libs-5.2.2-1.amzn2.0.3.x86_64                           1/8
  Updating   : xz-5.2.2-1.amzn2.0.3.x86_64                                2/8
  Updating   : python-pillow-2.0.0-23.gitd1c6db8.amzn2.0.1.x86_64         3/8
  Updating   : gzip-1.5-10.amzn2.0.1.x86_64                               4/8
  Cleanup    : xz-5.2.2-1.amzn2.0.2.x86_64                                5/8
  Cleanup    : python-pillow-2.0.0-21.gitd1c6db8.amzn2.0.1.x86_64         6/8
  Cleanup    : xz-libs-5.2.2-1.amzn2.0.2.x86_64                           7/8
  Cleanup    : gzip-1.5-10.amzn2.x86_64                                   8/8
  Verifying  : xz-libs-5.2.2-1.amzn2.0.3.x86_64                           1/8
  Verifying  : gzip-1.5-10.amzn2.0.1.x86_64                               2/8
  Verifying  : xz-5.2.2-1.amzn2.0.3.x86_64                                3/8
  Verifying  : python-pillow-2.0.0-23.gitd1c6db8.amzn2.0.1.x86_64         4/8
  Verifying  : xz-5.2.2-1.amzn2.0.2.x86_64                                5/8
  Verifying  : gzip-1.5-10.amzn2.x86_64                                   6/8
  Verifying  : xz-libs-5.2.2-1.amzn2.0.2.x86_64                           7/8
  Verifying  : python-pillow-2.0.0-21.gitd1c6db8.amzn2.0.1.x86_64         8/8

Updated:
  gzip.x86_64 0:1.5-10.amzn2.0.1
  python-pillow.x86_64 0:2.0.0-23.gitd1c6db8.amzn2.0.1
  xz.x86_64 0:5.2.2-1.amzn2.0.3
  xz-libs.x86_64 0:5.2.2-1.amzn2.0.3

Complete!
Cloud-init v. 19.3-45.amzn2 running 'modules:final' at Thu, 28 Apr 2022 01:40:09 +0000. Up 27.00 seconds.
ci-info: no authorized ssh keys fingerprints found for user ec2-user.
Cloud-init v. 19.3-45.amzn2 finished at Thu, 28 Apr 2022 01:40:10 +0000. Datasource DataSourceEc2. Up 27.35 seconds
In closing
If the commands written in your user data don't seem to be running on EC2, it may be worth checking whether the network route is actually in place at the point the EC2 instance is created.
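One way to make that explicit in Terraform is to have the dependency cover the whole internet path rather than just the NAT Gateway resource itself. The following is only a sketch reusing the resource names from vpc.tf above, not the fix used in this article: it also waits for the private subnet's 0.0.0.0/0 route and its route table association.

resource "aws_instance" "tf_instance" {
  # (snip)
  # Wait until the NAT Gateway, the private 0.0.0.0/0 route through it,
  # and the route table association all exist before creating the instance,
  # so the user data runs with a working internet path.
  depends_on = [
    aws_nat_gateway.tf_ngw,
    aws_route.tf_private_route,
    aws_route_table_association.tf_private_subrt_2
  ]
}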
That's all from Tsukuboshi, who has been tinkering with Terraform non-stop!